Exercises on tree dormancy
As temperature requirements are dependent on the cultivar, new data about the date of dormancy has to be collected with an experimental approach first. If these data were already collected by an institute, one can calculate the chilling and forcing period by using weather data and statistical methods such as a partial least squares (PLS) regression analysis (Luedeling, Kunz, and Blanke 2013). With this method the chilling phase can be calculated using the Dynamic Model, and the forcing phase is calculated using growing degree days (GDD). Knowing in which specific timespan chilling and heat are especially important for a cultivar, a fitting one can be selected for the region. Site-specific knowledge is also essential to pick future-proof cultivars that are already adapted for global warming.
Considering the last two millennia up to a few decades ago, the global climate was quite cold on average. Looking at the last decade, the average temperature keeps rising above every record previously known. The problem with the increasing temperatures is not only the direct effects, but also positive feedbacks such as melting ice caps or thawing permafrost landscapes. With increasing temperature the surface albedo changes and will absorb even more radiation, or frozen methane is released, which will increase the warming even more. While such warm periods have existed in Earth's history before, they were spread across millions of years. This gave evolution enough time to adapt and move across the planet. What usually happens on such a large temporal scale is now happening within decades, with no time for such adaptation.
The Representative Concentration Pathways (RCPs) are possible climate scenarios simulating the impact of different GHG emission pathways from 2000 to 2100. The number after RCP (e.g. RCP6.0) stands for the additional expected radiative forcing (W m-2) by 2100. They are large-scale simulations from which other studies derive their data to calculate global climate models and, with some model-based downscaling, regional climate models (downscaling is the correct term here, since it refines the coarse global grid to a higher spatial resolution).
Here is an extensive overview of these three methods:
Source: Luedeling et al. (2014)
test
To evaluate the performance of different methods, all these exercises were combined and extended in the following script.
library(chillR)
library(microbenchmark)
# Copy the lazily-loaded chillR example dataset into the global environment
# so subsequent code references a concrete object rather than the lazy promise.
Winters_hours_gaps <- Winters_hours_gaps
# It seems like ifelse() is actually quite slow. The dplyr version if_else()
# is faster, but not as versatile.
# Returns a logical vector: TRUE for every hour above 25 degC; NA propagates.
warm_hours_ifelse <- function(num_vec){
  ifelse(test = num_vec > 25, yes = TRUE, no = FALSE)
}
# Efficient way to calculate a boolean vector (neglects NAs, but still more
# efficient with NA test.)
# Preallocates FALSE, then flips the positions of hours above 25 degC.
warm_hours_rep <- function(num_vec){
  result <- rep(FALSE, length(num_vec))
  # which() drops NA comparisons, so missing temperatures stay FALSE
  result[which(num_vec > 25)] <- TRUE
  #result[is.na(num_vec)] <- NA
  result
}
# Benchmark both implementations on the example temperature column.
# Result: the rep()/logical-indexing version is roughly 3-4x faster on average.
microbenchmark(
warm_hours_rep = {warm_hours_rep(Winters_hours_gaps$Temp)},
warm_hours_ifelse = {warm_hours_ifelse(Winters_hours_gaps$Temp)},
times = 10000
)
## Unit: microseconds
## expr min lq mean median uq max neval cld
## warm_hours_rep 20.518 21.511 32.51891 22.011 23.990 5194.543 10000 a
## warm_hours_ifelse 67.828 70.753 114.49445 72.256 78.367 154523.169 10000 b
# Input single number which then is transformed to fit current table schema
# Advantages: no hassle with POSIXct
# Counts the hours above 25 degC between two YYYYMMDDHH-encoded timestamps
# (both endpoints inclusive). Assumes both timestamps match exactly one row.
filter_temp_on_num_dates <- function(df, start_date, end_date){
  # Decode the packed YYYYMMDDHH number with integer division / modulo
  # (equivalent to the trunc()-and-subtract arithmetic for non-negative input).
  start_year <- start_date %/% 1000000
  start_month <- (start_date %/% 10000) %% 100
  start_day <- (start_date %/% 100) %% 100
  start_hour <- start_date %% 100
  end_year <- end_date %/% 1000000
  end_month <- (end_date %/% 10000) %% 100
  end_day <- (end_date %/% 100) %% 100
  end_hour <- end_date %% 100
  start_row <- which(df$Year == start_year &
                       df$Month == start_month &
                       df$Day == start_day &
                       df$Hour == start_hour)
  end_row <- which(df$Year == end_year &
                     df$Month == end_month &
                     df$Day == end_day &
                     df$Hour == end_hour)
  sum(warm_hours_rep(df[start_row:end_row, ]$Temp))
}
# Input POSIXct dates, which are then transformed to fit current table schema
# Comment: No real performance advantage but enables standardized POSIXct input
# which may help for the transition
# Counts the hours above 25 degC between two POSIXct timestamps (inclusive).
filter_temp_on_dates <- function(df, start_date, end_date){
  # POSIXlt exposes broken-down calendar fields: $year is offset from 1900
  # and $mon ranges 0-11, hence the +1900 / +1 adjustments below.
  start_lt <- as.POSIXlt(start_date)
  end_lt <- as.POSIXlt(end_date)
  start_row <- which(df$Year == (1900 + start_lt$year) &
                       df$Month == (start_lt$mon + 1) &
                       df$Day == start_lt$mday &
                       df$Hour == start_lt$hour)
  end_row <- which(df$Year == (1900 + end_lt$year) &
                     df$Month == (end_lt$mon + 1) &
                     df$Day == end_lt$mday &
                     df$Hour == end_lt$hour)
  sum(warm_hours_rep(df[start_row:end_row, ]$Temp))
}
# Filter with POSIXct dates on a POSIXct date column
# Comment: Easy to understand, fast to program and
# if exact start or end date/hour isn't available it still works
# BUG FIX: the original indexed `df[df$date >= ...]` -- no comma, so it
# column-subset the data frame, and `df$date` does not exist (the column
# created below is capital-D "Date"), so the function silently counted 0
# warm hours. Row-subset the Temp column on the "Date" column instead.
filter_temp_on_dates_with_dates <- function(df, start_date, end_date){
  in_range <- df$Date >= start_date & df$Date <= end_date
  return(sum(warm_hours_rep(df$Temp[in_range])))
}
# Preparation
# BUG FIX: tryFormats are attempted in order, and strptime ignores trailing
# characters -- so "%Y%m%d" matched "2008063023" first and silently dropped
# the hour. "%Y%m%d%H" must be tried first to keep hour precision.
start_date = as.POSIXct("2008030400", tz = "UTC", tryFormats = c("%Y%m%d%H", "%Y%m%d"))
end_date = as.POSIXct("2008063023", tz = "UTC", tryFormats = c("%Y%m%d%H", "%Y%m%d"))
start_date_num = 2008030400
# NOTE(review): end_date_num encodes May 30 while end_date is June 30, so the
# benchmark variants cover different ranges -- confirm which end is intended.
end_date_num = 2008053023
# Build a variant of the dataset with a single POSIXct "Date" column
Winters_hours_gaps_real_dates <- within(Winters_hours_gaps,
Date <- as.POSIXct(
sprintf("%d-%02d-%02d %02d:00", Year, Month, Day, Hour))
)[,c("Date","Temp_gaps", "Temp")]
# Benchmark the three filter implementations against each other.
# The POSIXct-column variant wins by a wide margin (no per-call row matching).
res_benchmark <- microbenchmark(
using_numbers = {
filter_temp_on_num_dates(Winters_hours_gaps, start_date_num, end_date_num)
},
using_dates = {
filter_temp_on_dates(Winters_hours_gaps, start_date, end_date)
},
using_dates_on_dates = {
filter_temp_on_dates_with_dates(Winters_hours_gaps_real_dates, start_date, end_date)
},
times = 100000
)
print(res_benchmark, signif = 3)
## Unit: microseconds
## expr min lq mean median uq max neval cld
## using_numbers 222.0 232.0 311.0 237.0 251.0 163000 1e+05 b
## using_dates 247.0 259.0 348.0 264.0 280.0 159000 1e+05 c
## using_dates_on_dates 46.9 53.3 59.8 56.5 61.4 3730 1e+05 a
# library() errors immediately if the package is missing; require() only
# returns FALSE and is meant for conditional use, not plain loading.
library(chillR)
# Inspect the Chilling_Hours model shipped with chillR (help page and source)
?Chilling_Hours
Chilling_Hours
## function (HourTemp, summ = TRUE)
## {
## CH_range <- which(HourTemp <= 7.2 & HourTemp >= 0)
## CH_weights <- rep(0, length(HourTemp))
## CH_weights[CH_range] <- 1
## if (summ == TRUE)
## return(cumsum(CH_weights))
## else return(CH_weights)
## }
## <bytecode: 0x555cb6a198f0>
## <environment: namespace:chillR>
#Chilling_Hours(Winters_hours_gaps$Temp, summ = FALSE)
# Plot accumulated Utah chill units for the example dataset
plot(Utah_Model(Winters_hours_gaps$Temp, summ = TRUE))
# Custom chill model: piecewise-constant weights per temperature interval,
# evaluated via chillR's step_model(). Interface mirrors the built-in models
# (HourTemp vector in, cumulative sum or per-hour weights out).
own_step_model <- function (HourTemp, summ = TRUE){
  weight_table <- data.frame(
    lower  = c(-1000, 1.5, 2.5, 9, 12.5, 16, 18),
    upper  = c(1.5, 2.5, 9, 12.5, 16, 18, 1000),
    weight = c(0, 0.5, 1, 0.5, 0, -0.5, -1)
  )
  step_model(HourTemp, df = weight_table, summ = summ)
}
# First 100 accumulated values of the custom step model
own_step_model(Winters_hours_gaps$Temp, summ = TRUE)[1:100]
## [1] 0.0 -0.5 -1.5 -2.5 -3.5 -4.5 -5.5 -6.0 -6.0 -6.0 -5.5 -5.0 -4.5 -3.5 -2.5
## [16] -1.5 -0.5 0.0 1.0 2.0 3.0 4.0 4.5 4.5 4.5 3.5 2.5 1.5 0.5 -0.5
## [31] -1.5 -2.5 -3.0 -3.0 -2.5 -2.0 -1.5 -1.0 0.0 0.5 1.0 1.5 1.5 2.0 2.5
## [46] 3.0 3.5 3.5 3.5 3.0 2.0 1.0 0.0 -1.0 -2.0 -3.0 -3.5 -3.5 -3.0 -2.5
## [61] -1.5 -0.5 0.5 1.5 2.5 3.5 4.5 5.5 6.5 7.5 8.5 9.0 9.0 8.5 8.0
## [76] 7.5 7.0 6.5 6.0 5.5 5.5 6.0 6.5 7.0 8.0 9.0 10.0 11.0 12.0 13.0
## [91] 14.0 15.0 16.0 17.0 17.5 17.5 17.5 17.0 16.0 15.0
#Dynamic_Model(Winters_hours_gaps$Temp, summ = TRUE)
#chilling(make_JDay(Winters_hours_gaps))
#chilling(make_JDay(Winters_hours_gaps), Start_JDay = 100, End_JDay = 200)
# Compare several chill/heat models over Julian days 100-200 of the season
tempResponse(make_JDay(Winters_hours_gaps),
Start_JDay = 100,
End_JDay = 200,
models = list(Chilling_Hours = Chilling_Hours,
Utah_Chill_Units = Utah_Model,
Chill_Portions = Dynamic_Model,
GDH = GDH,
own_step_model = own_step_model))
## Season End_year Season_days Data_days Perc_complete Chilling_Hours
## 1 2007/2008 2008 101 101 100 50
## Utah_Chill_Units Chill_Portions GDH own_step_model
## 1 -1332.5 2.255033 31284.32 -1325
# library() errors on a missing package; require() merely warns and returns
# FALSE, which would let the script fail later with a confusing message.
library(chillR)
library(reshape2)
library(ggplot2)
# Day length, sunrise and sunset over the year at Porto's latitude
sun_path <- data.frame(JDay = 1:365, daylength(41.149178, 1:365))
# Long format so ggplot can draw one line per variable
sun_path_long <- melt(sun_path, id = c("JDay"))
sun_path_plot <- ggplot(sun_path_long, aes(x = JDay, y = value)) +
  geom_line(aes(colour = variable)) +
  xlab("Julian day") +
  ylab("Hours") +
  theme_bw(base_size = 15)
sun_path_plot
# Create idealized temperature
hour_temps_CKA_ideal <- stack_hourly_temps(KA_weather, 51)
# Create empirically based interpolated temperature data
temp_coeffs <- Empirical_daily_temperature_curve(Winters_hours_gaps)
Winter_daily <- make_all_day_table(Winters_hours_gaps, input_timestep="hour")
hour_temps_winter_emp <- Empirical_hourly_temperatures(Winter_daily, temp_coeffs)
# Abandoned project
# # Self excerise: Interpolating data for a Fjäll in sweden (historical hourly data available)
# fjaell_a_hourly <- read.csv2(file = "data/smhi-opendata_1_112540_20211031_164311.csv",header = TRUE, dec = ".", sep = ";", quote = "", skip = 10)
# fjaell_a_hourly$date_time <- as.POSIXct(paste(fjaell_a_hourly$Datum, fjaell_a_hourly$"Tid..UTC."), tz = "UTM")
# names(fjaell_a_hourly)[names(fjaell_a_hourly)=="Lufttemperatur"] <- "Temp"
#
# ggplot(fjaell_a_hourly, aes(x = date_time, y = Temp))+
# geom_line()+
# xlab("timeline")+
# ylab("air temperate (°C)")+
# theme_bw(base_size = 14)
#
#
# require(data.table)
# library(data.table)
# test <- c(as.POSIXct("1985-05-01 06:00:00", tz = "UTC"), as.POSIXct("1985-05-01 12:00:00", tz = "UTC"))
# year(test)
#
# fjaell_a_hourly <- data.frame(fjaell_a_hourly,
# Year = year(fjaell_a_hourly$date_time),
# Month = month(fjaell_a_hourly$date_time),
# Day = mday(fjaell_a_hourly$date_time),
# Hour = hour(fjaell_a_hourly$date_time))
#
#
# fjaell_a_hourly_to_day <- make_all_day_table(fjaell_a_hourly, timestep = "hour")
#
# #fjaell_a_hourly_inter <- interpolate_gaps_hourly(fjaell_a_hourly_to_day, latitude = 61.8)
# fjaell_a_hourly_inter <- read.csv("data/fjaell_a_hourly_data_interpolated.csv")
# fjaell_a_hourly_inter$date_time <- as.POSIXct(fjaell_a_hourly_inter$date_time)
# write.csv(fjaell_a_hourly_inter$weather, file = "data/fjaell_a_hourly_data_interpolated.csv", row.names = FALSE)
# #write.csv(fjaell_a_hourly_inter$daily_patch_report, file = "data/fjaell_a_hourly_data_interpolated_quality_check.csv", row.names = FALSE)
#
# nrow(fjaell_a_hourly_inter[fjaell_a_hourly_inter$Tmin_source == "interpolated",]) + nrow(fjaell_a_hourly_inter[fjaell_a_hourly_inter$Tmax_source == "interpolated",])
# nrow(fjaell_a_hourly_inter[fjaell_a_hourly_inter$Tmin_source == "solved",]) + nrow(fjaell_a_hourly_inter[fjaell_a_hourly_inter$Tmax_source == "solved",])
# nrow(fjaell_a_hourly_inter[is.na(fjaell_a_hourly_inter$Tmin_source),]) + nrow(fjaell_a_hourly_inter[is.na(fjaell_a_hourly_inter$Tmax_source),])
#
# ggplot(fjaell_a_hourly_inter, aes(x = date_time, y = Temp))+
# geom_line()+
# ggtitle("Weatherstation \"Fjäll A\" from Schweden (Latitude = 61.8)")+
# xlab("timeline")+
# ylab("air temperate (°C)")+
# theme_bw(base_size = 14)
library(chillR)
## Porto: 41.149178,-8.612112; crs=wgs84
# List GSOD stations near Porto, then download and clean one station's record
station_list <- handle_gsod(action = "list_stations",
location = c(y = 41.149178, x = -8.612112),
time_interval = c(1973, 2019))
weather <- handle_gsod(action = "download_weather",
location = "085450_99999",
time_interval = c(1973,2019))
weather_cleaned <- handle_gsod(weather)
weather_cleaned <- weather_cleaned$PORTO$weather
# Cache the cleaned record so later chunks work without network access
write.csv(weather_cleaned, file = "data/gsod_weather_porto.csv", row.names = FALSE)
library(chillR)
library(kableExtra)
## Porto: 41.149178,-8.612112; crs=wgs84
weather <- read.csv("data/gsod_weather_porto.csv")
# Interpolate short Tmin/Tmax gaps and collect a per-season QC summary
fixed_weather <- fix_weather(weather)
kable(fixed_weather$QC, caption="Quality control summary produced by *fix_weather()*, with only winter days interpolated") %>%
kable_styling("striped", position = "left", font_size = 10)
| Season | End_year | Season_days | Data_days | Missing_Tmin | Missing_Tmax | Incomplete_days | Perc_complete |
|---|---|---|---|---|---|---|---|
| 1972/1973 | 1973 | 365 | 365 | 3 | 3 | 3 | 99.2 |
| 1973/1974 | 1974 | 365 | 365 | 1 | 0 | 1 | 99.7 |
| 1974/1975 | 1975 | 365 | 365 | 2 | 2 | 2 | 99.5 |
| 1975/1976 | 1976 | 366 | 366 | 2 | 3 | 3 | 99.2 |
| 1976/1977 | 1977 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1977/1978 | 1978 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1978/1979 | 1979 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1979/1980 | 1980 | 366 | 366 | 0 | 0 | 0 | 100.0 |
| 1980/1981 | 1981 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1981/1982 | 1982 | 365 | 365 | 4 | 4 | 4 | 98.9 |
| 1982/1983 | 1983 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1983/1984 | 1984 | 366 | 366 | 0 | 0 | 0 | 100.0 |
| 1984/1985 | 1985 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1985/1986 | 1986 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1986/1987 | 1987 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1987/1988 | 1988 | 366 | 366 | 0 | 0 | 0 | 100.0 |
| 1988/1989 | 1989 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1989/1990 | 1990 | 365 | 365 | 2 | 2 | 2 | 99.5 |
| 1990/1991 | 1991 | 365 | 365 | 5 | 5 | 5 | 98.6 |
| 1991/1992 | 1992 | 366 | 366 | 1 | 1 | 1 | 99.7 |
| 1992/1993 | 1993 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1993/1994 | 1994 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1994/1995 | 1995 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1995/1996 | 1996 | 366 | 366 | 0 | 0 | 0 | 100.0 |
| 1996/1997 | 1997 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1997/1998 | 1998 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 1998/1999 | 1999 | 365 | 365 | 1 | 1 | 1 | 99.7 |
| 1999/2000 | 2000 | 366 | 366 | 0 | 0 | 0 | 100.0 |
| 2000/2001 | 2001 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2001/2002 | 2002 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2002/2003 | 2003 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2003/2004 | 2004 | 366 | 366 | 0 | 0 | 0 | 100.0 |
| 2004/2005 | 2005 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2005/2006 | 2006 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2006/2007 | 2007 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2007/2008 | 2008 | 366 | 366 | 0 | 0 | 0 | 100.0 |
| 2008/2009 | 2009 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2009/2010 | 2010 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2010/2011 | 2011 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2011/2012 | 2012 | 366 | 366 | 0 | 0 | 0 | 100.0 |
| 2012/2013 | 2013 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2013/2014 | 2014 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2014/2015 | 2015 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2015/2016 | 2016 | 366 | 366 | 0 | 0 | 0 | 100.0 |
| 2016/2017 | 2017 | 365 | 365 | 0 | 0 | 0 | 100.0 |
| 2017/2018 | 2018 | 365 | 365 | 0 | 1 | 1 | 99.7 |
| 2018/2019 | 2019 | 365 | 365 | 1 | 1 | 1 | 99.7 |
# Download data for nearby weather stations to substitute from on large gaps
station_list <- handle_gsod(action = "list_stations",
                            location = c(y = 41.149178, x = -8.612112),
                            time_interval = c(1973, 2019))
# Select all stations closer than 50 km that overlap the target period
selected_stations_chillR_code <- station_list[station_list$distance <= 50 & station_list$Overlap_years > 0, "chillR_code"]
patch_weather <- list()
# seq_along() is safe when no station qualifies (1:length() would yield c(1, 0))
for(i in seq_along(selected_stations_chillR_code)){
  station_name <- station_list[selected_stations_chillR_code[i]==station_list$chillR_code, "STATION.NAME"]
  print(station_name)
  # Nested handle_gsod: outer call cleans the freshly downloaded record
  patch_weather <- append(patch_weather, list(handle_gsod(handle_gsod(action="download_weather",
                                                                      location=selected_stations_chillR_code[i],
                                                                      time_interval=c(1973,2019))[[1]]$weather)))
  # Placeholder keeps list positions aligned with station names when a
  # download yields nothing
  if(length(patch_weather) < i){
    patch_weather[[i]] <- "No Data"
  }
  names(patch_weather)[i] <- station_name
}
## [1] "PORTO/SERRA DO PILAR"
## [1] "PORTO"
## [1] "OVAR/MACEDA"
## [1] "OVAR"
# Remove No Data values from list (keep only real data frames)
patch_weather <- patch_weather[unlist(lapply(patch_weather, is.data.frame), use.names = FALSE)]
# Fill gaps in the Porto record from neighbours, then interpolate the rest
patched <- patch_daily_temperatures(weather = weather,
                                    patch_weather = patch_weather)
porto_weather <- fix_weather(patched)
write.csv(porto_weather$weather, file = "data/porto_weather.csv", row.names = FALSE)
# Weather Generator
library(chillR)
library(tidyverse)
library(ggplot2)
porto_weather <- read.csv("data/porto_weather.csv")
porto_weather <- porto_weather[,c("Year","Month","Day","Tmin","Tmax")]
# Calibrate on 1973-2019 observations, simulate 100 synthetic years
temperature <- temperature_generation(porto_weather,
years = c(1973, 2019),
sim_years = c(2000, 2099))
# Stack observed and simulated records with a tag column for faceting
temperature_compare <- cbind(porto_weather[
which(porto_weather$Year %in% 1973:2019),],
data_source="observed")
temperature_compare <- rbind(temperature_compare,
cbind(temperature[[1]][,c("Year","Month","Day","Tmin","Tmax")],
data_source="simulated"))
# Map every record onto a single dummy year (2000) for a seasonal x-axis
temperature_compare$date <- as.Date(ISOdate(2000,
temperature_compare$Month,
temperature_compare$Day))
ggplot(data=temperature_compare, aes(date,Tmin)) +
geom_smooth(aes(colour = factor(Year))) +
facet_wrap(vars(data_source)) +
theme_bw(base_size = 20) +
theme(legend.position = "none") +
scale_x_date(date_labels = "%b")
ggplot(data=temperature_compare, aes(date,Tmax)) +
geom_smooth(aes(colour = factor(Year))) +
facet_wrap(vars(data_source)) +
theme_bw(base_size = 20) +
theme(legend.position = "none") +
scale_x_date(date_labels = "%b")
# Chill distribution
# Dormancy season Nov 1 (JDay 305) to end of Feb (JDay 59), observed vs simulated
chill_observed <- chilling(stack_hourly_temps(weather = temperature_compare[which(temperature_compare$data_source=="observed"),],
latitude = 41.1),
Start_JDay = 305,
End_JDay = 59)
chill_simulated <- chilling(stack_hourly_temps(weather = temperature_compare[which(temperature_compare$data_source=="simulated"),],
latitude = 41.1),
Start_JDay = 305,
End_JDay = 59)
chill_comparison <- cbind(chill_observed, data_source = "observed")
chill_comparison <- rbind(chill_comparison, cbind(chill_simulated, data_source = "simulated"))
# Keep only seasons with complete data so totals are comparable
chill_comparison_full_season <- chill_comparison[which(chill_comparison$Perc_complete == 100),]
# The accumulated chill portions for the full season
ggplot(chill_comparison_full_season, aes(x=Chill_portions)) +
geom_histogram(binwidth=1,aes(fill = factor(data_source))) +
theme_bw(base_size = 20) +
labs(fill = "Data source") +
xlab("Chill accumulation (Chill Portions)") +
ylab("Frequency")
# Probability of reaching a specific number of chill portions
chill_simulations<-chill_comparison_full_season[which(chill_comparison_full_season$data_source=="simulated"),]
ggplot(chill_simulations, aes(x=Chill_portions)) +
stat_ecdf(geom = "step",lwd=1.5,col="blue") +
ylab("Cumulative probability") +
xlab("Chill accumulation (in Chill Portions)") +
theme_bw(base_size = 20)
library(chillR)
library(ggplot2)
# Load data
porto_weather <- read.csv("data/porto_weather.csv")
porto_weather <- porto_weather[,c("Year","Month","Day","Tmin","Tmax")]
# Baseline scenario centred on 1996 (roughly the midpoint of 1973-2019)
baseline_scenario <- temperature_scenario_from_records(weather = porto_weather,
year = 1996)
# Five historic snapshot scenarios
all_past_scenarios <- temperature_scenario_from_records(weather = porto_weather,
year = c(1975, 1985, 1995, 2005, 2015))
# Express each historic scenario relative to the 1996 baseline
adjusted_scenarios <- temperature_scenario_baseline_adjustment(baseline_temperature_scenario = baseline_scenario,
temperature_scenario = all_past_scenarios)
all_scenario_temps <- temperature_generation(porto_weather,
years = c(1973, 2019),
sim_years = c(2000,2099),
temperature_scenario = adjusted_scenarios)
# Chill/heat responses for the dormancy season (Nov 1 - end of Feb)
chill_hist_scenario_list <- tempResponse_daily_list(all_scenario_temps,
latitude=41.1,
Start_JDay = 305,
End_JDay = 59)
# Five historic scenarios were generated (1975, 1985, 1995, 2005, 2015);
# the previous index [1:6] produced a trailing NA name that was silently
# never used. Index only the five that exist.
scenarios <- names(chill_hist_scenario_list)[1:5]
all_scenarios <- chill_hist_scenario_list[[scenarios[1]]]
all_scenarios[, "scenario"] <- as.numeric(scenarios[1])
# Loop through the other scenarios
for (sc in scenarios[2:5]){
  all_scenarios <- rbind(all_scenarios,
                         cbind(chill_hist_scenario_list[[sc]], scenario = as.numeric(sc)))
}
# Keep only seasons with complete data
all_scenarios<-all_scenarios[which(all_scenarios$Perc_complete==100),]
# Actual chill metrics in this period for comparison
actual_chill<-tempResponse_daily_list(porto_weather, latitude=41.1,
Start_JDay = 305,
End_JDay = 59)[[1]]
actual_chill<-actual_chill[which(actual_chill$Perc_complete==100),]
# There doesn't seem to be a clear trend in chill portion changes over the years
ggplot(data = all_scenarios, aes(scenario, Chill_Portions, fill = factor(scenario)))+
geom_violin()+
ylab("Chill accumulation (Chill Portions)") +
xlab("Scenario year") +
theme_bw(base_size=15)+
geom_point(data=actual_chill,
aes(End_year,Chill_Portions,fill="blue"),
col="blue",show.legend = FALSE)+
scale_fill_discrete(name="Scenario", breaks = unique(all_scenarios$scenario))
# Future temperature scenarios (Chapter 14)
library(chillR)
# Loading downloaded data set
porto_weather <- read.csv("data/porto_weather.csv")
# Get climate scenario data from the ClimateWizard API (requires network);
# one 31-year window centred on each target year, for each RCP.
RCPs <- c("rcp45", "rcp85")
Times <- c(2050, 2085)
for (RCP in RCPs)
for (Time in Times)
{
start_year <- Time - 15
end_year <- Time + 15
clim_scen <- getClimateWizardData(
c(longitude = -8.612112, latitude = 41.149178),
RCP,
start_year,
end_year,
temperature_generation_scenarios = TRUE,
baseline = c(1975, 2005),
metric = "monthly_min_max_temps",
GCMs = "all"
)
# Cache each scenario set so the download is only needed once
save_temperature_scenarios(clim_scen,
"data/ClimateWizard",
paste0("porto_futures_", Time, "_", RCP))
}
# Baseline adjustment: shift from the ClimateWizard baseline (median year
# 1990 of 1975-2005) to the median year of our station record (1996)
scenario_1990 <- temperature_scenario_from_records(porto_weather, 1990)
scenario_1996 <- temperature_scenario_from_records(porto_weather, 1996)
adjustment_scenario <- temperature_scenario_baseline_adjustment(scenario_1996,
scenario_1990)
#adjustment_scenario
RCPs<-c("rcp45","rcp85")
Times<-c(2050,2085)
for(RCP in RCPs)
for(Time in Times)
{
clim_scen<-load_ClimateWizard_scenarios(
"data/ClimateWizard",
paste0("porto_futures_",Time,"_",RCP))
clim_scen_adjusted<-
temperature_scenario_baseline_adjustment(
baseline_temperature_scenario=adjustment_scenario,
temperature_scenario=clim_scen)
# Generate 101 synthetic years per GCM scenario
Temps<-temperature_generation(
weather=porto_weather,
years = c(1973, 2019),
sim_years=c(2001, 2101),
temperature_scenario = clim_scen_adjusted)
save_temperature_scenarios(
Temps,
"data/Weather",
paste0("porto_",Time,"_",RCP))
}
# Adding historic scenarios
all_past_scenarios <- temperature_scenario_from_records(
  weather = porto_weather,
  year = c(1980, 1990, 2000, 2010))
# Spell out the full argument name (was partially matched as 'baseline='),
# consistent with the other temperature_scenario_baseline_adjustment() calls.
adjusted_scenarios <- temperature_scenario_baseline_adjustment(
  baseline_temperature_scenario = scenario_1996,
  temperature_scenario = all_past_scenarios)
all_past_scenario_temps <- temperature_generation(
  weather = porto_weather,
  years = c(1973, 2019),
  sim_years = c(2001, 2101),
  temperature_scenario = adjusted_scenarios
)
save_temperature_scenarios(all_past_scenario_temps,
                           "data/Weather",
                           "porto_historic")
# Add our own and some existing models
# Frost model: weight 1 for hours at or below 0 degC, 0 above -- i.e. it
# simply counts freezing hours via chillR's step_model().
frost_model <- function(x) {
  freezing_steps <- data.frame(
    lower  = c(-1000, 0),
    upper  = c(0, 1000),
    weight = c(1, 0)
  )
  step_model(x, freezing_steps)
}
# Model collection applied throughout the following analyses
models <- list(Chill_CP = Dynamic_Model,
               Heat_GDH = GDH,
               Frost_H = frost_model)
# Calculate chill for observed and historic scenarios
Temps <- load_temperature_scenarios("data/Weather", "porto_historic")
# misstolerance = 10: accept seasons with up to 10% missing data
chill_past_scenarios <- tempResponse_daily_list(
Temps,
latitude = 41.149178,
Start_JDay = 305,
End_JDay = 59,
models = models,
misstolerance = 10
)
chill_observed <- tempResponse_daily_list(
porto_weather,
latitude = 41.149178,
Start_JDay = 305,
End_JDay = 59,
models = models,
misstolerance = 10
)
# Cache results so the plotting code below can run independently
save_temperature_scenarios(chill_past_scenarios,
"data/chill",
"porto_historic")
save_temperature_scenarios(chill_observed,
"data/chill",
"porto_observed")
# Plot chill scenarios
chill_past_scenarios<-load_temperature_scenarios(
"data/chill",
"porto_historic")
chill_observed<-load_temperature_scenarios(
"data/chill",
"porto_observed")
chills <- make_climate_scenario(
chill_past_scenarios,
caption = "Historic",
historic_data = chill_observed,
time_series = TRUE
)
plot_climate_scenarios(
climate_scenario_list=chills,
metric="Chill_CP",
metric_label="Chill (Chill Portions)")
## [[1]]
## [1] "time series labels"
# Apply same procedure on future data: chill response per RCP x time horizon
for(RCP in RCPs)
for(Time in Times)
{
Temps<-load_temperature_scenarios(
"data/Weather",
paste0("porto_",Time,"_",RCP))
chill<-tempResponse_daily_list(
Temps,
latitude=41.149178,
Start_JDay = 305,
End_JDay = 59,
models=models,
misstolerance = 10)
save_temperature_scenarios(
chill,
"data/chill",
paste0("porto_",Time,"_",RCP))
}
# Name each scenario and append it to the 'chills' plotting collection
for(RCP in RCPs)
for(Time in Times)
{
chill<-load_temperature_scenarios(
"data/chill",
paste0("porto_",Time,"_",RCP))
if(RCP=="rcp45") RCPcaption <- "RCP4.5"
if(RCP=="rcp85") RCPcaption <- "RCP8.5"
if(Time=="2050") Time_caption <- "2050"
if(Time=="2085") Time_caption <- "2085"
chills <-make_climate_scenario(
chill,
caption =c(RCPcaption, Time_caption),
add_to = chills)
}
# Plotting each chill model
# Chill Portions (Dynamic Model)
plot_climate_scenarios(
climate_scenario_list=chills,
metric="Chill_CP",
metric_label="Chill (Chill Portions)",
texcex=1.5)
## [[1]]
## [1] "time series labels"
##
## [[2]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
##
## [[3]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
##
## [[4]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
##
## [[5]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
# Growing Degree Hours (the metric plotted is GDH, not growing degree dates)
plot_climate_scenarios(
climate_scenario_list=chills,
metric="Heat_GDH",
metric_label="Heat (Growing Degree Hours)",
texcex=1.5)
## [[1]]
## [1] "time series labels"
##
## [[2]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
##
## [[3]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
##
## [[4]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
##
## [[5]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
# Frost hours (custom frost_model defined above)
plot_climate_scenarios(
climate_scenario_list=chills,
metric="Frost_H",
metric_label="Frost hours",
texcex=1.5)
## [[1]]
## [1] "time series labels"
##
## [[2]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
##
## [[3]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
##
## [[4]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
##
## [[5]]
## code Label
## 1 1 bcc-csm1-1
## 2 2 BNU-ESM
## 3 3 CanESM2
## 4 4 CESM1-BGC
## 5 5 MIROC-ESM
## 6 6 CNRM-CM5
## 7 7 ACCESS1-0
## 8 8 CSIRO-Mk3-6-0
## 9 9 GFDL-CM3
## 10 10 GFDL-ESM2G
## 11 11 GFDL-ESM2M
## 12 12 inmcm4
## 13 13 IPSL-CM5A-LR
## 14 14 IPSL-CM5A-MR
## 15 15 CCSM4
Load chill scenarios and annotate them.
# Reload the cached chill results and rebuild the annotated scenario collection
chill_past_scenarios <- load_temperature_scenarios("data/chill",
"porto_historic")
chill_observed <- load_temperature_scenarios("data/chill",
"porto_observed")
chills <- make_climate_scenario(
chill_past_scenarios,
caption = "Historic",
historic_data = chill_observed,
time_series = TRUE
)
RCPs <- c("rcp45", "rcp85")
Times <- c(2050, 2085)
for (RCP in RCPs)
for (Time in Times)
{
chill <- load_temperature_scenarios("data/chill",
paste0("porto_", Time, "_", RCP))
if (RCP == "rcp45")
RCPcaption <- "RCP4.5"
if (RCP == "rcp85")
RCPcaption <- "RCP8.5"
if (Time == "2050")
Time_caption <- "2050"
if (Time == "2085")
Time_caption <- "2085"
chills <- make_climate_scenario(chill,
caption = c(RCPcaption, Time_caption),
add_to = chills)
}
Reformat data into a ggplot suitable form.
# Combine all historic simulation scenarios (chills[[1]]$data) into one data
# frame, tagging each with placeholder GCM/RCP values and its scenario year.
past_simulated <- do.call(rbind, lapply(names(chills[[1]]$data), function(nam) {
  ch <- chills[[1]]$data[[nam]]
  ch[, "GCM"] <- "none"
  ch[, "RCP"] <- "none"
  ch[, "Year"] <- as.numeric(nam)
  ch
}))
# Add value 'Historic' for the new 'Scenario' column
past_simulated["Scenario"] <- "Historic"
# Rename the historic data for convenience
past_observed <- chills[[1]][["historic_data"]]
# Same for future data: flatten every future scenario (chills[[2]] onwards)
# into one frame, labelled with its GCM name, RCP and time-horizon captions.
future_data <- do.call(rbind, lapply(2:length(chills), function(idx) {
  do.call(rbind, lapply(names(chills[[idx]]$data), function(nam) {
    ch <- chills[[idx]]$data[[nam]]
    ch[, "GCM"] <- nam
    ch[, "RCP"] <- chills[[idx]]$caption[1]
    ch[, "Year"] <- chills[[idx]]$caption[2]
    ch
  }))
}))
# plot_scenarios_gg() is a project-local helper defined in this sourced file
source(file = "treephenology_functions.R")
plot_scenarios_gg(past_observed=past_observed,
past_simulated=past_simulated,
future_data=future_data,
metric="Heat_GDH",
axis_label="Heat (in Growing Degree Hours)")
plot_scenarios_gg(past_observed=past_observed,
past_simulated=past_simulated,
future_data=future_data,
metric="Chill_CP",
axis_label="Chill (in Chill Portions)")
plot_scenarios_gg(past_observed=past_observed,
past_simulated=past_simulated,
future_data=future_data,
metric="Frost_H",
axis_label="Frost duration (in hours)")
# Model Collection we are using
# Hourly models operate on hourly temperatures (chillR built-ins and friends)
hourly_models <- list(
Chilling_units = chilling_units,
Low_chill = low_chill_model,
Modified_Utah = modified_utah_model,
North_Carolina = north_carolina_model,
Positive_Utah = positive_utah_model,
Chilling_Hours = Chilling_Hours,
Utah_Chill_Units = Utah_Model,
Chill_Portions = Dynamic_Model
)
# Daily models operate directly on daily Tmin/Tmax
# NOTE(review): "Triangula_Chill_Haninnen" looks like a typo for
# "Triangular_Chill_Hanninen", but the key propagates into output column
# names (see table below), so renaming it would break downstream code.
daily_models <- list(
Rate_of_Chill = rate_of_chill,
Chill_Days = chill_days,
Exponential_Chill = exponential_chill,
Triangula_Chill_Haninnen = triangular_chill_1,
Triangular_Chill_Legave = triangular_chill_2
)
metrics <- c(names(daily_models), names(hourly_models))
# Human-readable labels, in the same order as 'metrics'
model_labels = c(
"Rate of Chill",
"Chill Days",
"Exponential Chill",
"Triangular Chill (Häninnen)",
"Triangular Chill (Legave)",
"Chilling Units",
"Low-Chill Chill Units",
"Modified Utah Chill Units",
"North Carolina Chill Units",
"Positive Utah Chill Units",
"Chilling Hours",
"Utah Chill Units",
"Chill Portions"
)
data.frame(Metric = model_labels, 'Function name' = metrics)
## Metric Function.name
## 1 Rate of Chill Rate_of_Chill
## 2 Chill Days Chill_Days
## 3 Exponential Chill Exponential_Chill
## 4 Triangular Chill (Häninnen) Triangula_Chill_Haninnen
## 5 Triangular Chill (Legave) Triangular_Chill_Legave
## 6 Chilling Units Chilling_units
## 7 Low-Chill Chill Units Low_chill
## 8 Modified Utah Chill Units Modified_Utah
## 9 North Carolina Chill Units North_Carolina
## 10 Positive Utah Chill Units Positive_Utah
## 11 Chilling Hours Chilling_Hours
## 12 Utah Chill Units Utah_Chill_Units
## 13 Chill Portions Chill_Portions
# Apply all Models to historic weather data
porto_temps <- read_tab("data/porto_weather.csv")
Temps <-
load_temperature_scenarios("data/Weather", "porto_historic")
# Dormancy season window: JDay 305 (1 Nov) across the year boundary to JDay 59
Start_JDay <- 305
End_JDay <- 59
# apply daily models to past scenarios
daily_models_past_scenarios <- tempResponse_list_daily(Temps,
Start_JDay = Start_JDay,
End_JDay = End_JDay,
models = daily_models)
# keep only seasons with more than 90 % data coverage
daily_models_past_scenarios <- lapply(daily_models_past_scenarios,
function(x)
x[which(x$Perc_complete > 90),])
# apply hourly models to past scenarios (latitude is needed to interpolate
# hourly temperatures from daily Tmin/Tmax)
hourly_models_past_scenarios <- tempResponse_daily_list(
Temps,
latitude = 41.149178,
Start_JDay = Start_JDay,
End_JDay = End_JDay,
models = hourly_models,
misstolerance = 10
)
# Merge: bind the hourly-model columns onto each daily-model table, matching
# list elements by name. Assumes both lists contain the same scenarios with
# rows in the same season order -- TODO confirm (no join key is used here).
past_scenarios <- daily_models_past_scenarios
past_scenarios <- lapply(names(past_scenarios),
function(x)
cbind(past_scenarios[[x]],
hourly_models_past_scenarios[[x]][, names(hourly_models)]))
# lapply dropped the names; restore them
names(past_scenarios) <- names(daily_models_past_scenarios)
# apply daily models to past observations
daily_models_observed <- tempResponse_daily(
porto_temps,
Start_JDay = Start_JDay,
End_JDay = End_JDay,
models = daily_models
)
# keep only seasons with more than 90 % data coverage
daily_models_observed <-
daily_models_observed[which(daily_models_observed$Perc_complete > 90),]
# apply hourly models to past observations
hourly_models_observed <- tempResponse_daily_list(
porto_temps,
latitude = 41.149178,
Start_JDay = Start_JDay,
End_JDay = End_JDay,
models = hourly_models,
misstolerance = 10
)
# Merge daily- and hourly-model columns for the observed record; [[1]] takes
# the single element of the returned list. Assumes its rows align with
# daily_models_observed after the >90 % filter -- TODO confirm.
past_observed <- cbind(daily_models_observed,
hourly_models_observed[[1]][, names(hourly_models)])
# save all the results
save_temperature_scenarios(past_scenarios,
"data/chill",
"porto_multichill_historic")
write.csv(past_observed,
"data/chill/porto_multichill_observed.csv",
row.names = FALSE)
# Future: apply all chill/heat models to each future scenario ensemble
# (2 RCPs x 2 time horizons), filter incomplete seasons, merge daily- and
# hourly-model results, and save one file set per scenario.
RCPs <- c("rcp45", "rcp85")
Times <- c(2050, 2085)
for (RCP in RCPs) {
  for (Time in Times) {
    Temps <- load_temperature_scenarios("data/Weather",
                                        paste0("porto_", Time, "_", RCP))
    # Daily models; keep only seasons with > 90 % data coverage
    daily_models_future_scenarios <-
      tempResponse_list_daily(Temps,
                              Start_JDay = Start_JDay,
                              End_JDay = End_JDay,
                              models = daily_models)
    daily_models_future_scenarios <-
      lapply(daily_models_future_scenarios,
             function(x)
               x[which(x$Perc_complete > 90), ])
    # Hourly models (latitude needed for hourly temperature interpolation)
    hourly_models_future_scenarios <-
      tempResponse_daily_list(
        Temps,
        latitude = 41.149178,
        Start_JDay = Start_JDay,
        End_JDay = End_JDay,
        models = hourly_models,
        misstolerance = 10
      )
    # Merge hourly-model columns onto the daily-model tables, matching the
    # list elements (GCMs) by name; restore names dropped by lapply
    future_scenarios <- lapply(
      names(daily_models_future_scenarios),
      function(x)
        cbind(daily_models_future_scenarios[[x]],
              hourly_models_future_scenarios[[x]][, names(hourly_models)]))
    names(future_scenarios) <-
      names(daily_models_future_scenarios)
    save_temperature_scenarios(future_scenarios,
                               "data/chill",
                               paste0("porto_multichill_", Time, "_", RCP))
  }
}
# Load multichill data and annotate it
chill_past_scenarios <- load_temperature_scenarios("data/chill",
"porto_multichill_historic")
chill_observed <- read_tab("data/chill/porto_multichill_observed.csv")
# Start the climate-scenario collection with the historic scenarios, attaching
# the observed record as a time series
chills <- make_climate_scenario(
chill_past_scenarios,
caption = "Historic",
historic_data = chill_observed,
time_series = TRUE
)
# Load the saved future multichill scenarios and append each one to the
# `chills` collection, labelled by RCP and time horizon.
RCPs <- c("rcp45", "rcp85")
Times <- c(2050, 2085)
for (RCP in RCPs) {
  for (Time in Times) {
    # BUGFIX: the scenarios were saved above with the "porto_multichill_"
    # prefix, not "Bonn_multichill_" (leftover from another site's analysis).
    chill <- load_temperature_scenarios(
      "data/chill",
      paste0("porto_multichill_", Time, "_", RCP))
    # Only two RCPs occur in this loop, so a single if/else suffices
    RCPcaption <- if (RCP == "rcp45") "RCP4.5" else "RCP8.5"
    Time_caption <- as.character(Time)
    chills <- make_climate_scenario(chill,
                                    caption = c(RCPcaption, Time_caption),
                                    add_to = chills)
  }
}
# Compute safe winter chill (SWC): the 10 % quantile of each chill/heat metric
# per scenario, i.e. the amount exceeded in 90 % of simulated years.
result_list <- list()
for (i in seq_along(chills)) {
  ch <- chills[[i]]
  scen_names <- names(ch$data)
  n_scen <- length(scen_names)
  if (ch$caption[1] == "Historic") {
    # Historic: one simulated scenario per label year, no GCM/RCP attached
    GCMs <- rep("none", n_scen)
    RCPs <- rep("none", n_scen)
    Years <- as.numeric(ch$labels)
    Scenario <- rep("Historic", n_scen)
  } else {
    # Future: one scenario per GCM; caption carries the RCP and horizon year
    GCMs <- scen_names
    RCPs <- rep(ch$caption[1], n_scen)
    Years <- rep(as.numeric(ch$caption[2]), n_scen)
    Scenario <- rep("Future", n_scen)
  }
  # One result row per scenario x metric; collect in a list and bind once
  # instead of growing a data frame with rbind() inside the loop
  for (j in seq_along(scen_names)) {
    for (met in metrics) {
      result_list[[length(result_list) + 1]] <- data.frame(
        Metric = met,
        GCM = GCMs[j],
        RCP = RCPs[j],
        Year = Years[j],
        Result = quantile(ch$data[[scen_names[j]]][, met], 0.1),
        Scenario = Scenario[j]
      )
    }
  }
}
results <- do.call(rbind, result_list)
# Express SWC as relative change vs. the 1980 baseline scenario of each metric
for (met in metrics) {
  # hoist the metric's row index, which the original computed three times
  met_rows <- which(results$Metric == met)
  baseline <- results[which(results$Metric == met &
                              results$Year == 1980), "Result"]
  results[met_rows, "SWC"] <-
    results[met_rows, "Result"] / baseline - 1
}
# Plotting future data: heat map of SWC change per GCM, metric, RCP and year.
# A shared colour range (rng) keeps this plot comparable with the historic one.
rng <- range(results$SWC)
p_future <- ggplot(results[which(!results$GCM == "none"), ],
                   aes(GCM, y = factor(Metric, levels = metrics),
                       fill = SWC)) +
  geom_tile() +
  facet_grid(RCP ~ Year) +
  theme_bw(base_size = 11) +
  theme(axis.text = element_text(size = 8)) +
  scale_fill_gradientn(colours = matlab.like(15),
                       labels = scales::percent,
                       limits = rng) +
  # rotate GCM names so they remain readable
  theme(axis.text.x = element_text(
    angle = 75,
    hjust = 1,
    vjust = 1)) +
  # FIX: the baseline used in the SWC calculation is the 1980 scenario,
  # so the legend must say 1980, not 1975
  labs(fill = "Change in\nSafe Winter Chill\nsince 1980") +
  scale_y_discrete(labels = model_labels) +
  ylab("Chill metric")
# Plotting historic data: SWC change over the historic scenario years, on the
# same colour scale (rng) as the future plot
p_past <-
  ggplot(results[which(results$GCM == "none"), ],
         aes(Year, y = factor(Metric, levels = metrics),
             fill = SWC)) +
  geom_tile() +
  theme_bw(base_size = 11) +
  theme(axis.text = element_text(size = 8)) +
  scale_fill_gradientn(colours = matlab.like(15),
                       labels = scales::percent,
                       limits = rng) +
  # x axis on top so it sits between the stacked past/future panels
  scale_x_continuous(position = "top") +
  # FIX: the SWC baseline is the 1980 scenario, so say 1980, not 1975
  labs(fill = "Change in\nSafe Winter Chill\nsince 1980") +
  scale_y_discrete(labels = model_labels) +
  ylab("Chill metric")
# Combine both plots with patchwork: past on top (1/3 height), future below,
# sharing a single legend; `&` applies the theme to all panels
chill_comp_plot <-
(p_past +
p_future +
plot_layout(
guides = "collect",
nrow = 2,
heights = c(1, 2)
)) &
theme(
legend.position = "right",
strip.background = element_blank(),
strip.text = element_text(face = "bold")
)
chill_comp_plot
# The historic results apply to both RCP panels, so duplicate them once per
# RCP label before combining them with the GCM-averaged future results.
hist_results <- results[which(results$GCM == "none"), ]
hist_results$RCP <- "RCP4.5"
hist_rcp85 <- hist_results
hist_rcp85$RCP <- "RCP8.5"
hist_results <- rbind(hist_results, hist_rcp85)
future_results <- results[which(!results$GCM == "none"), ]
# Average SWC across GCMs for each metric / RCP / year combination
GCM_aggregate <- aggregate(
  future_results$SWC,
  by = list(future_results$Metric, future_results$RCP, future_results$Year),
  FUN = mean)
colnames(GCM_aggregate) <- c("Metric", "RCP", "Year", "SWC")
# One combined time series per metric and RCP
RCP_Time_series <- rbind(hist_results[, c("Metric", "RCP", "Year", "SWC")],
                         GCM_aggregate)
# Now we make a static plot of chill development over time according to all the
# chill models (one coloured line per metric, one panel per RCP).
chill_change_plot <-
  ggplot(data = RCP_Time_series,
         aes(x = Year, y = SWC, col = factor(Metric, levels = metrics))) +
  geom_line(lwd = 1.3) +
  facet_wrap(~RCP, nrow = 2) +
  theme_bw(base_size = 15) +
  # FIX: the colour legend lists the chill metrics (its entries come from
  # `Metric`), not the SWC change -- the old title "Change in Safe Winter
  # Chill since 1975" belonged to the fill legend of the heat maps.
  labs(col = "Chill metric") +
  scale_color_discrete(labels = model_labels) +
  scale_y_continuous(labels = scales::percent) +
  theme(strip.background = element_blank(),
        strip.text = element_text(face = "bold")) +
  ylab("Safe Winter Chill")
# Animate plot: gganimate reveals the lines progressively along the Year axis
animation <- chill_change_plot + transition_reveal(Year)
animate(animation, renderer = gifski_renderer())
# Saving the gif produced by the last animate() call
anim_save("chill_comparison_animation.gif", path = "data", animation = last_animation())
# Load first-bloom records for 'Roter Boskoop' and split the YEARMODA date
# strings into Year/Month/Day so make_JDay can compute Julian days.
rot_bskp <- read_tab("data/Roter_Boskoop_bloom_1958_2019.csv")
rot_bskp_first <- rot_bskp[, 1:2]
bloom_dates <- rot_bskp_first$First_bloom
rot_bskp_first[, "Year"] <- substr(bloom_dates, 1, 4)
rot_bskp_first[, "Month"] <- substr(bloom_dates, 5, 6)
rot_bskp_first[, "Day"] <- substr(bloom_dates, 7, 8)
rot_bskp_first <- make_JDay(rot_bskp_first)
# Reduce to the two columns PLS_pheno expects, renamed to Year / pheno
rot_bskp_first <- rot_bskp_first[, c("Pheno_year", "JDay")]
colnames(rot_bskp_first) <- c("Year", "pheno")
# Klein-Altendorf daily weather record with Julian days added
KA_temps <- read_tab("data/TMaxTMin1958-2019_patched.csv")
KA_temps <- make_JDay(KA_temps)
# PLS regression of bloom dates on daily temperatures, then plot the result
PLS_results <- PLS_pheno(KA_temps, rot_bskp_first)
source(file = "treephenology_functions.R")
ggplot_PLS(PLS_results)
# Load temperature and bloom data
# NOTE(review): this repeats the data preparation from the previous section;
# kept so this chapter's code runs on its own.
temps <- read_tab("data/TMaxTMin1958-2019_patched.csv")
# Interpolate hourly temperatures from daily Tmin/Tmax for Klein-Altendorf
temps_hourly <- stack_hourly_temps(temps, latitude = 50.6)
rot_bskp <- read_tab("data/Roter_Boskoop_bloom_1958_2019.csv")
rot_bskp_first <- rot_bskp[, 1:2]
# Split the YEARMODA date string into Year/Month/Day for make_JDay
rot_bskp_first[, "Year"] <- substr(rot_bskp_first$First_bloom, 1, 4)
rot_bskp_first[, "Month"] <-
substr(rot_bskp_first$First_bloom, 5, 6)
rot_bskp_first[, "Day"] <- substr(rot_bskp_first$First_bloom, 7, 8)
rot_bskp_first <- make_JDay(rot_bskp_first)
# Keep phenological year and Julian day, renamed to Year / pheno
rot_bskp_first <- rot_bskp_first[, c("Pheno_year", "JDay")]
colnames(rot_bskp_first) <- c("Year", "pheno")
# Calculate daily chill accumulation
# running_mean = 1 means no smoothing of the daily model outputs
daychill <- daily_chill(
hourtemps = temps_hourly,
running_mean = 1,
models = list(
Chilling_Hours = Chilling_Hours,
Utah_Chill_Units = Utah_Model,
Chill_Portions = Dynamic_Model,
GDH = GDH
)
)
# Daily and cumulative temperature-response plots for chill (Chill Portions)
# and heat (GDH), highlighting the 2008 season. Same four panels as before
# (CP daily, CP cumulative, GDH daily, GDH cumulative), generated via a loop
# instead of four verbatim calls; results are discarded.
for (metric in c("Chill_Portions", "GDH")) {
  metric_label <- if (metric == "GDH") "GDH" else "Chill Portions"
  for (cumulative in c(FALSE, TRUE)) {
    tmp <-
      make_daily_chill_plot2(
        daychill,
        metrics = c(metric),
        cumulative = cumulative,
        startdate = 300,
        enddate = 30,
        focusyears = c(2008),
        metriclabels = metric_label
      )
  }
}
rm(tmp)
# PLS regression using accumulated chill and heat (rather than raw
# temperatures) as predictors; split_month = 6 starts the season in July
plscf <- PLS_chill_force(
daily_chill_obj = daychill,
bio_data_frame = rot_bskp_first,
split_month = 6,
chill_models = c("Chilling_Hours", "Utah_Chill_Units", "Chill_Portions"),
heat_models = "GDH",
runn_means = 11
)
source("treephenology_functions.R")
# First look at the PLS output; chill_phase/heat_phase are placeholders
# (c(0, 0)) because the phases are read off this plot afterwards
plot_PLS_chill_force(
plscf,
chill_metric = "Chill_Portions",
heat_metric = "GDH",
chill_label = "CP",
heat_label = "GDH",
chill_phase = c(0, 0),
heat_phase = c(0, 0)
)
# Calculate PLS chill force model (same call as above, repeated so this
# section stands alone)
plscf <- PLS_chill_force(
daily_chill_obj = daychill,
bio_data_frame = rot_bskp_first,
split_month = 6,
chill_models = c("Chilling_Hours", "Utah_Chill_Units", "Chill_Portions"),
heat_models = "GDH",
runn_means = 11
)
source("treephenology_functions.R")
# The chill_phase/heat_phase windows below (in Julian days; negative values
# count back into the previous year) were delineated manually from the plots
# Chilling Hours
plot_PLS_chill_force(
plscf,
chill_metric = "Chilling_Hours",
heat_metric = "GDH",
chill_label = "CH",
heat_label = "GDH",
chill_phase = c(-12, 52),
heat_phase = c(-1, 116.5)
)
# Utah Chilling Units
plot_PLS_chill_force(
plscf,
chill_metric = "Utah_Chill_Units",
heat_metric = "GDH",
chill_label = "CU",
heat_label = "GDH",
chill_phase = c(-10, 71),
heat_phase = c(-5, 116.5)
)
# Chill Portions
plot_PLS_chill_force(
plscf,
chill_metric = "Chill_Portions",
heat_metric = "GDH",
chill_label = "CP",
heat_label = "GDH",
chill_phase = c(-26, 64),
heat_phase = c(2, 116.5)
)
source("treephenology_functions.R")
# Simulate how the Dynamic Model (chill) and GDH (heat) respond to Tmin/Tmax
# combinations across the dormancy months at Porto's latitude
model_sensitivities_porto <- Chill_model_sensitivity(latitude=41.149178,
temp_models=list(Dynamic_Model=Dynamic_Model,
GDH=GDH),
month_range=c(10:12,1:5))
porto_weather <- read_tab("data/porto_weather.csv")
# Overlay observed Porto temperatures on the chill sensitivity surface
Chill_sensitivity_temps(model_sensitivities_porto,
porto_weather,
temp_model="Dynamic_Model",
month_range=c(10,11,12,1,2,3),
legend_label="Chill per day \n(Chill Portions)") +
ggtitle("Chill model sensitivity at Porto, Portugal")
# Same overlay for the heat (GDH) sensitivity, later in the season
Chill_sensitivity_temps(model_sensitivities_porto,
porto_weather,
temp_model="GDH",
month_range=c(12,1:5),
legend_label="Heat per day \n(GDH)") +
ggtitle("Heat model sensitivity at Porto, Portugal")
Exercises on evaluating PLS regression results: 1. Reproduce the analysis for the ‘Roter Boskoop’ dataset.
# Note: Data loading is the same as in chapter 22 and thus not repeated.
# Phase windows from the PLS chill/force analysis: chilling from JDay 339
# (= -26 wrapped into the previous year) to 64, forcing from JDay 2 to 117
chill_phase <- c(339, 64)
heat_phase <- c(2, 117)
# Chill accumulated (Chill Portions) over the chilling phase of every season
chill <- tempResponse(
hourtemps = temps_hourly,
Start_JDay = chill_phase[1],
End_JDay = chill_phase[2],
models = list(Chill_Portions = Dynamic_Model),
misstolerance = 10
)
# Heat accumulated (GDH) over the forcing phase of every season
heat <- tempResponse(
hourtemps = temps_hourly,
Start_JDay = heat_phase[1],
End_JDay = heat_phase[2],
models = list(GDH = GDH)
)
# Plot chill and heat accumulation distribution across all recorded seasons
ggplot(data = chill, aes(x = Chill_Portions)) +
geom_histogram() +
ggtitle("Chill accumulation during endodormancy (Chill Portions)") +
xlab("Chill accumulation (Chill Portions)") +
ylab("Frequency between 1958 and 2019") +
theme_bw(base_size = 12)
ggplot(data = heat, aes(x = GDH)) +
geom_histogram() +
ggtitle("Heat accumulation during ecodormancy (GDH)") +
xlab("Heat accumulation (Growing Degree Hours)") +
ylab("Frequency between 1958 and 2019") +
theme_bw(base_size = 12)
source("treephenology_functions.R")
# Bloom date as a function of chilling- and forcing-phase temperatures
# (Kriging-interpolated surface; the warning below comes from that fit)
pheno_trend_ggplot(temps=temps,
pheno=rot_bskp_first,
chill_phase=chill_phase,
heat_phase=heat_phase,
phenology_stage="Bloom")
## Warning:
## Grid searches over lambda (nugget and sill variances) with minima at the endpoints:
## (REML) Restricted maximum likelihood
## minimum at right endpoint lambda = 42037.08 (eff. df= 3.000991 )
No exercises today. Maybe you can work on cleaning up your logbook.
# Parameters
# Starting guess and box constraints for the 12 PhenoFlex parameters, in the
# order expected by PhenoFlex_GDHwrapper:
# yc, zc, s1, Tu, E0, E1, A0, A1, Tf, Tc, Tb, slope
par <- c(40, 190, 0.5, 25, 3372.8, 9900.3, 6319.5, 5.939917e13, 4, 36, 4, 1.60)
upper <- c(41, 200, 1.0, 30, 4000.0, 10000.0, 7000.0, 6.e13, 10, 40, 10, 50.00)
lower <- c(38, 180, 0.1, 0 , 3000.0, 9000.0, 6000.0, 5.e13, 0, 0, 0, 0.05)
# Generate list with temp tables for each season (August through June)
years <- c(1959:2018)
SeasonList <- genSeasonList(temps_hourly$hourtemps, mrange = c(8, 6), years=years)
# Fitting parameters
# Simulated annealing fit of the PhenoFlex model to the observed bloom dates;
# bloomJDays is restricted to the years covered by SeasonList
Fit_res <- phenologyFitter(par.guess=par,
modelfn = PhenoFlex_GDHwrapper,
bloomJDays=rot_bskp_first$pheno[which(rot_bskp_first$Year>=years[1])],
SeasonList=SeasonList,
lower=lower,
upper=upper,
control=list(smooth=FALSE, verbose=FALSE, maxit=1000,
nb.stop.improvement=5))
# Gathering all desired results in one table
# (predicted vs. observed bloom Julian days and the per-season error)
predictions <- data.frame(season=years,
prediction=Fit_res$pbloomJDays,
pheno = Fit_res$bloomJDays,
error = Fit_res$pbloomJDays - Fit_res$bloomJDays)
# Predicted bloom dates over time (converted to calendar dates of a
# reference year for a readable axis)
ggplot(data = predictions, aes(x = season, y = JDay_to_date(prediction,
2001,
date_format = "%Y-%m-%d"))) +
geom_smooth() +
geom_point() +
ylab("Predicted bloom date on Roter Boskoop in Klein-Altendorf") +
theme_bw(base_size=15)
# Predicted vs. observed, with the 1:1 line and a linear fit
ggplot(predictions,aes(x=pheno,y=prediction)) +
geom_point() +
geom_abline(intercept=0, slope=1)+
geom_smooth(method = "lm", colour = "firebrick")+
theme_bw(base_size = 15) +
xlab("Observed bloom date (Day of the year)") +
ylab("Predicted bloom date (Day of the year)") +
ggtitle("Predicted vs. observed bloom dates")
# Error distribution
ggplot(predictions,aes(error)) +
geom_histogram() +
ggtitle("Distribution of prediction errors")
# Root Mean Square Error of Prediction (RMSEP)
RMSEP(predictions$prediction, predictions$pheno)
## [1] 3.869168
# Mean Error
mean(predictions$error)
## [1] -1.077083
# Mean Absolute Error (MAE)
mean(abs(predictions$error))
## [1] 2.859028
# Create temperature response curves based on our fitted model: evaluate the
# chill (bell) and heat (GDH) response functions over -5 to 30 degrees C
temp_values <- seq(-5, 30, 0.1)
simulation_par <- Fit_res$par
source("treephenology_functions.R")
temp_response <- data.frame(
  Temperature = temp_values,
  Chill_response = gen_bell(simulation_par, temp_values),
  Heat_response = GDH_response(temp_values, simulation_par))
# Long format: one row per (Temperature, response type) for faceting
melted_response <- reshape2::melt(temp_response, id.vars = "Temperature")
# Plot temperature response: chill and heat curves side by side
ggplot(melted_response, aes(x = Temperature, y = value)) +
  geom_line(size = 2, aes(col = variable)) +
  ylab("Temperature response (arbitrary units)") +
  xlab("Temperature (°C)") +
  facet_wrap(vars(variable), scales = "free",
             labeller = labeller(
               variable = c(Chill_response = "Chill response",
                            Heat_response = "Heat response"))) +
  scale_color_manual(values = c("Chill_response" = "blue",
                                "Heat_response" = "red")) +
  theme_bw(base_size = 15) +
  theme(legend.position = "none")
# Cycling through all Tmin-Tmax combinations and calculate the fitted
# PhenoFlex temperature response (chill and heat accumulation per day) for
# each month of the dormancy season.
latitude <- 50.6
month_range <- c(10, 11, 12, 1, 2, 3)
Tmins <- c(-20:20)
Tmaxs <- c(-15:30)
# Accumulators; start empty instead of seeding with NA and filtering later
mins <- numeric(0)
maxs <- numeric(0)
chill_eff <- numeric(0)
heat_eff <- numeric(0)
month <- numeric(0)
for (mon in month_range) {
  # Days in the month (reference year 2002); December handled separately
  # because ISOdate(2002, 13, 1) would be invalid
  days_month <- as.numeric(difftime(ISOdate(2002, mon + 1, 1),
                                    ISOdate(2002, mon, 1)))
  if (mon == 12) days_month <- 31
  # Weather table spanning the month (placeholder temperatures, overwritten
  # inside the loop below)
  weather <- make_all_day_table(data.frame(Year = c(2001, 2002),
                                           Month = c(mon, mon),
                                           Day = c(1, days_month),
                                           Tmin = c(0, 0),
                                           Tmax = c(0, 0)))
  for (tmin in Tmins) {
    for (tmax in Tmaxs) {
      if (tmax >= tmin) {
        weather$Tmin <- tmin
        weather$Tmax <- tmax
        hourtemps <- stack_hourly_temps(weather,
                                        latitude = latitude)$hourtemps$Temp
        # Chill accumulated by the fitted model, expressed per day
        chill_eff <- c(chill_eff,
                       PhenoFlex(temp = hourtemps,
                                 times = seq_along(hourtemps),
                                 A0 = simulation_par[7],
                                 A1 = simulation_par[8],
                                 E0 = simulation_par[5],
                                 E1 = simulation_par[6],
                                 Tf = simulation_par[9],
                                 slope = simulation_par[12],
                                 deg_celsius = TRUE,
                                 basic_output = FALSE)$y[length(hourtemps)] /
                         (length(hourtemps) / 24))
        # Heat (GDH response) accumulated, expressed per day
        heat_eff <- c(heat_eff,
                      cumsum(GDH_response(hourtemps,
                                          simulation_par))[length(hourtemps)] /
                        (length(hourtemps) / 24))
        mins <- c(mins, tmin)
        maxs <- c(maxs, tmax)
        month <- c(month, mon)
      }
    }
  }
}
results <- data.frame(Month = month, Tmin = mins, Tmax = maxs,
                      Chill_eff = chill_eff, Heat_eff = heat_eff)
# With empty-vector seeds there is no NA row to drop; kept as a harmless guard
results <- results[!is.na(results$Month), ]
source("treephenology_functions.R")
# Overlay Klein-Altendorf hourly temperatures on the fitted PhenoFlex chill
# efficiency surface
Chill_sensitivity_temps(results,
temps_hourly$hourtemps,
temp_model="Chill_eff",
month_range=c(10,11,12,1,2,3),
Tmins=c(-20:20),
Tmaxs=c(-15:30),
legend_label="Chill per day \n(arbitrary)") +
ggtitle("PhenoFlex chill efficiency ('Roter Boskoop')")
# Same overlay for the fitted heat efficiency
Chill_sensitivity_temps(results,
temps_hourly$hourtemps,
temp_model="Heat_eff",
month_range=c(10,11,12,1,2,3),
Tmins=c(-20:20),
Tmaxs=c(-15:30),
legend_label="Heat per day \n(arbitrary)") +
ggtitle("PhenoFlex heat efficiency ('Roter Boskoop')")
# Reshape the bloom table to long format: one row per year and phenological
# event (the non-id columns become `variable`, their YEARMODA dates `value`)
roter_boskoop<-melt(rot_bskp,
id.vars = "Pheno_year",
value.name="YEARMODA")
# Split the YEARMODA date string into Year/Month/Day for make_JDay
roter_boskoop$Year<-as.numeric(substr(roter_boskoop$YEARMODA,1,4))
roter_boskoop$Month<-as.numeric(substr(roter_boskoop$YEARMODA,5,6))
roter_boskoop$Day<-as.numeric(substr(roter_boskoop$YEARMODA,7,8))
roter_boskoop<-make_JDay(roter_boskoop)
# Bloom stage timing over the years, with smoothed trends
ggplot(data=roter_boskoop,aes(Pheno_year,JDay,col=variable)) +
geom_line() +
geom_smooth(alpha = 0.2) +
theme_bw(base_size=15) +
scale_color_discrete(
name="Phenological event",
labels=c("First bloom", "Full bloom", "Last bloom")) +
xlab("Phenological year") +
ylab("Julian date (day of the year)")
3. Evaluate the occurrence of frost events at Klein-Altendorf since 1958. Illustrate this in a plot.
# Frost model: a step function scoring 1 for every hour below 0 degrees C
# and 0 otherwise (i.e. counting frost hours)
frost_df <- data.frame(
  lower = c(-1000, 0),
  upper = c(0, 1000),
  weight = c(1, 0))
frost_model <- function(x) step_model(x, frost_df)
# Frost hours summed per phenological year
frost <- tempResponse(temps_hourly, models = c(frost = frost_model))
# Non-summarizing variant: returns the per-hour frost indicator, so we can
# aggregate frost hours per calendar day
frost_model_no_summ <- function(x) step_model(x, frost_df, summ = FALSE)
temps_hourly$hourtemps[, "frost"] <-
  frost_model_no_summ(temps_hourly$hourtemps$Temp)
Daily_frost_hours <- aggregate(temps_hourly$hourtemps$frost,
                               by = list(temps_hourly$hourtemps$YEARMODA),
                               FUN = sum)
# NOTE(review): this assumes the rows of the daily `temps` table align
# one-to-one, in order, with the aggregated YEARMODA groups -- confirm both
# cover the same gap-free date range.
Daily_frost <- make_JDay(temps)
Daily_frost[, "Frost_hours"] <- Daily_frost_hours$x
# Annual frost hours over time
ggplot(frost, aes(End_year, frost)) +
  geom_smooth() +
  geom_point() +
  ylim(c(0, NA)) +
  ylab("Frost hours per year") +
  xlab("Year")
# NOTE(review): the next six lines duplicate the Daily_frost computation from
# the previous section; kept so this chunk also runs on its own.
frost_model_no_summ<-function(x) step_model(x, frost_df, summ=FALSE)
temps_hourly$hourtemps[,"frost"]<-frost_model_no_summ(temps_hourly$hourtemps$Temp)
Daily_frost_hours<-aggregate(temps_hourly$hourtemps$frost,
by=list(temps_hourly$hourtemps$YEARMODA),
FUN=sum)
Daily_frost<-make_JDay(temps)
Daily_frost[,"Frost_hours"]<-Daily_frost_hours$x
# Turn frost-free days into NA so they are not drawn as zero-size points later
Daily_frost$Frost_hours[which(Daily_frost$Frost_hours==0)]<-NA
# Wide bloom table: one row per phenological year, one column per event
Ribbon_Rot<-dcast(roter_boskoop,Pheno_year ~ variable, value.var = "JDay")
# Year-indexed lookup so bloom dates can be matched to daily frost records
lookup_dates<-Ribbon_Rot
row.names(lookup_dates)<-lookup_dates$Pheno_year
Daily_frost[,"First_bloom"]<-
lookup_dates[as.character(Daily_frost$Year),"First_bloom"]
Daily_frost[,"Last_bloom"]<-
lookup_dates[as.character(Daily_frost$Year),"Last_bloom"]
# Classify each frost day relative to the bloom window. The ORDER of the four
# assignments matters: each later one overwrites a subset of the earlier ones.
Daily_frost[
which(!is.na(Daily_frost$Frost_hours)),"Bloom_frost"]<-
"Before bloom"
Daily_frost[
which(Daily_frost$JDay>=Daily_frost$First_bloom),"Bloom_frost"]<-
"During bloom"
Daily_frost[
which(Daily_frost$JDay>Daily_frost$Last_bloom),"Bloom_frost"]<-
"After bloom"
# Late-year days (JDay > 180) precede the NEXT season's bloom
Daily_frost[
which(Daily_frost$JDay>180),"Bloom_frost"]<-
"Before bloom"
# Bloom window (gray ribbon) and full-bloom date (line) per year, with frost
# days overlaid as points sized by frost hours and coloured by timing
ggplot(data = Ribbon_Rot, aes(Pheno_year)) +
  geom_ribbon(aes(ymin = First_bloom, ymax = Last_bloom),
              fill = "light gray") +
  geom_line(aes(y = Full_bloom)) +
  # FIX: theme_bw(base_size = 15) was applied twice (before and after the
  # point layer); one application is sufficient and behaves identically here
  theme_bw(base_size = 15) +
  xlab("Phenological year") +
  ylab("Julian date (day of the year)") +
  geom_point(data = Daily_frost,
             aes(Year, JDay, size = Frost_hours, col = Bloom_frost),
             alpha = 0.8) +
  scale_size(range = c(0, 5),
             breaks = c(1, 5, 10, 15, 20),
             labels = c("1", "5", "10", "15", "20"),
             name = "Frost hours") +
  scale_color_manual(
    breaks = c("Before bloom", "During bloom", "After bloom"),
    values = c("light green", "red", "light blue"),
    name = "Frost timing") +
  # zoom onto the bloom season
  ylim(c(80, 160))
# Total frost hours per year and frost-timing category (NA days count as 0)
Bloom_frost_trend<-aggregate(
Daily_frost$Frost_hours,
by=list(Daily_frost$Year,Daily_frost$Bloom_frost),
FUN=function(x) sum(x,na.rm=TRUE))
colnames(Bloom_frost_trend)<-c("Year","Frost_timing","Frost_hours")
# Focus on frost that occurred during bloom
DuringBloom<-
Bloom_frost_trend[which(Bloom_frost_trend$Frost_timing=="During bloom"),]
ggplot(data=DuringBloom,aes(Year,Frost_hours)) +
geom_col()
# Mann-Kendall trend test: no significant trend in bloom-time frost hours
Kendall(x=DuringBloom$Year,y=DuringBloom$Frost_hours)
## tau = 0.0333, 2-sided pvalue =0.74038
# Linear regression as a second look at the (weak) trend
lm(DuringBloom$Frost_hours~DuringBloom$Year)
##
## Call:
## lm(formula = DuringBloom$Frost_hours ~ DuringBloom$Year)
##
## Coefficients:
## (Intercept) DuringBloom$Year
## -91.48652 0.04786